Support Vector Machines (SVM)¶

Introduction¶

W3sicXVlc3Rpb24iOiAiV2hpY2ggc3RhdGVtZW50cyBhY2N1cmF0ZWx5IGRlc2NyaWJlIHRoZSBtYXhpbWFsIG1hcmdpbiBjbGFzc2lmaWVyPyIsICJ0eXBlIjogIm1hbnlfY2hvaWNlIiwgImFuc3dlcnMiOiBbeyJhbnN3ZXIiOiAiSXQgaXMgZGVzaWduZWQgZm9yIG5vbi1saW5lYXJseSBzZXBhcmFibGUgZGF0YS4iLCAiY29ycmVjdCI6IGZhbHNlLCAiZmVlZGJhY2siOiAiV3JvbmcuIn0sIHsiYW5zd2VyIjogIiAgSXQgbWF4aW1pemVzIHRoZSBtYXJnaW4gYmV0d2VlbiB0d28gY2xhc3Nlcy4iLCAiY29ycmVjdCI6IHRydWUsICJmZWVkYmFjayI6ICJDb3JyZWN0LiJ9LCB7ImFuc3dlciI6ICIgIEl0IHRlbmRzIHRvIG92ZXJmaXQgYW5kIGlzIGhlYXZpbHkgcmVsaWFudCBvbiBzdXBwb3J0IHZlY3RvcnMuIiwgImNvcnJlY3QiOiB0cnVlLCAiZmVlZGJhY2siOiAiQ29ycmVjdC4ifSwgeyJhbnN3ZXIiOiAiSXQgY2FuIGhhbmRsZSBsYXJnZSBkYXRhc2V0cyBlZmZpY2llbnRseS4iLCAiY29ycmVjdCI6IGZhbHNlLCAiZmVlZGJhY2siOiAiV3JvbmcuIn1dfV0=

W3sicXVlc3Rpb24iOiAiV2hhdCBhcmUgY2hhcmFjdGVyaXN0aWNzIG9mIHRoZSBzdXBwb3J0IHZlY3RvciBjbGFzc2lmaWVyPyIsICJ0eXBlIjogIm1hbnlfY2hvaWNlIiwgImFuc3dlcnMiOiBbeyJhbnN3ZXIiOiAiSXQgbWluaW1pemVzIHRoZSBtYXJnaW4gYmV0d2VlbiBjbGFzc2VzLiIsICJjb3JyZWN0IjogZmFsc2UsICJmZWVkYmFjayI6ICJXcm9uZy4ifSwgeyJhbnN3ZXIiOiAiIEl0IGlzIGxlc3Mgc2Vuc2l0aXZlIHRvIG5vaXNlIGluIHRoZSBkYXRhLiIsICJjb3JyZWN0IjogZmFsc2UsICJmZWVkYmFjayI6ICJXcm9uZy4ifSwgeyJhbnN3ZXIiOiAiSXQgYWxsb3dzIGZvciBzb21lIG1pc2NsYXNzaWZpY2F0aW9uLCBrbm93biBhcyB0aGUgJ1NvZnQgTWFyZ2luIENsYXNzaWZpZXIuJyAiLCAiY29ycmVjdCI6IHRydWUsICJmZWVkYmFjayI6ICJDb3JyZWN0LiJ9LCB7ImFuc3dlciI6ICJJdCBpcyBhbiBleHRlbnNpb24gb2YgdGhlIG1heGltYWwgbWFyZ2luIGNsYXNzaWZpZXIuIiwgImNvcnJlY3QiOiB0cnVlLCAiZmVlZGJhY2siOiAiQ29ycmVjdC4ifV19XQ==

W3sicXVlc3Rpb24iOiAiV2hhdCBhZHZhbnRhZ2VzIGFyZSBhc3NvY2lhdGVkIHdpdGggdGhlIHN1cHBvcnQgdmVjdG9yIG1hY2hpbmUgY29tcGFyZWQgdG8gdGhlIHN1cHBvcnQgdmVjdG9yIGNsYXNzaWZpZXI/IiwgInR5cGUiOiAibWFueV9jaG9pY2UiLCAiYW5zd2VycyI6IFt7ImFuc3dlciI6ICIgSXQgY2FuIGhhbmRsZSBub24tbGluZWFybHkgc2VwYXJhYmxlIGRhdGEiLCAiY29ycmVjdCI6IHRydWUsICJmZWVkYmFjayI6ICJDb3JyZWN0LiJ9LCB7ImFuc3dlciI6ICIgIEl0IGlzIGxlc3Mgcm9idXN0IHRvIG5vaXNlIGluIHRoZSBkYXRhLiIsICJjb3JyZWN0IjogZmFsc2UsICJmZWVkYmFjayI6ICJXcm9uZy4ifSwgeyJhbnN3ZXIiOiAiSXQgY2FuIGhhbmRsZSBsYXJnZSBkYXRhc2V0cy4iLCAiY29ycmVjdCI6IGZhbHNlLCAiZmVlZGJhY2siOiAiV3JvbmcuIn0sIHsiYW5zd2VyIjogIkl0IHVzZXMgYSBrZXJuZWwgZnVuY3Rpb24gdG8gdHJhbnNmb3JtIGlucHV0IGRhdGEuIiwgImNvcnJlY3QiOiB0cnVlLCAiZmVlZGJhY2siOiAiQ29ycmVjdC4ifV19XQ=='

W3sicXVlc3Rpb24iOiAiV2hhdCBkb2VzIFNWTSBzdGFuZCBmb3I/IiwgInR5cGUiOiAibWFueV9jaG9pY2UiLCAiYW5zd2VycyI6IFt7ImFuc3dlciI6ICJTaW1wbGUgVmVjdG9yIE1hY2hpbmUiLCAiY29ycmVjdCI6IGZhbHNlLCAiZmVlZGJhY2siOiAiV3JvbmcuIn0sIHsiYW5zd2VyIjogIiBTdXBwb3J0IFZlY3RvciBNYWNoaW5lIiwgImNvcnJlY3QiOiB0cnVlLCAiZmVlZGJhY2siOiAiQ29ycmVjdC4ifSwgeyJhbnN3ZXIiOiAiIFN1cGVydmlzZWQgVmVjdG9yIE1vZGVsIiwgImNvcnJlY3QiOiBmYWxzZSwgImZlZWRiYWNrIjogIldyb25nLiJ9LCB7ImFuc3dlciI6ICJTaW5ndWxhciBWYWx1ZSBNZXRob2QiLCAiY29ycmVjdCI6IGZhbHNlLCAiZmVlZGJhY2siOiAiV3JvbmcuIn1dfV0=

W3sicXVlc3Rpb24iOiAiSW4gU1ZNLCB3aGF0IGlzIGEgc3VwcG9ydCB2ZWN0b3I/IiwgInR5cGUiOiAibWFueV9jaG9pY2UiLCAiYW5zd2VycyI6IFt7ImFuc3dlciI6ICJBbnkgZGF0YSBwb2ludCBpbiB0aGUgZGF0YXNldCIsICJjb3JyZWN0IjogZmFsc2UsICJmZWVkYmFjayI6ICJXcm9uZy4ifSwgeyJhbnN3ZXIiOiAiIERhdGEgcG9pbnRzIHRoYXQgbGllIGNsb3Nlc3QgdG8gdGhlIGRlY2lzaW9uIGJvdW5kYXJ5IiwgImNvcnJlY3QiOiB0cnVlLCAiZmVlZGJhY2siOiAiQ29ycmVjdC4ifSwgeyJhbnN3ZXIiOiAiIE91dGxpZXJzIGluIHRoZSBkYXRhc2V0IiwgImNvcnJlY3QiOiBmYWxzZSwgImZlZWRiYWNrIjogIldyb25nLiJ9LCB7ImFuc3dlciI6ICJBbGwgZGF0YSBwb2ludHMgZXF1YWxseSBjb250cmlidXRlIHRvIHRoZSBtb2RlbCIsICJjb3JyZWN0IjogZmFsc2UsICJmZWVkYmFjayI6ICJXcm9uZy4ifV19XQ==

W3sicXVlc3Rpb24iOiAiV2hpY2ggb2YgdGhlIGZvbGxvd2luZyBhcmUgY2hhcmFjdGVyaXN0aWNzIG9mIHN1cHBvcnQgdmVjdG9ycyBpbiBTVk0/IChTZWxlY3QgYWxsIHRoYXQgYXBwbHkpIiwgInR5cGUiOiAibWFueV9jaG9pY2UiLCAiYW5zd2VycyI6IFt7ImFuc3dlciI6ICJMaWUgY2xvc2VzdCB0byB0aGUgZGVjaXNpb24gYm91bmRhcnkiLCAiY29ycmVjdCI6IHRydWUsICJmZWVkYmFjayI6ICJDb3JyZWN0LiJ9LCB7ImFuc3dlciI6ICIgT3V0bGllcnMgaW4gdGhlIGRhdGFzZXQiLCAiY29ycmVjdCI6IGZhbHNlLCAiZmVlZGJhY2siOiAiV3JvbmcuIn0sIHsiYW5zd2VyIjogIiBEZXRlcm1pbmUgdGhlIHBvc2l0aW9uIG9mIHRoZSBkZWNpc2lvbiBib3VuZGFyeSIsICJjb3JyZWN0IjogdHJ1ZSwgImZlZWRiYWNrIjogIkNvcnJlY3QuIn0sIHsiYW5zd2VyIjogIkNvbnRyaWJ1dGUgc2lnbmlmaWNhbnRseSB0byB0aGUgbW9kZWwncyBwYXJhbWV0ZXJzIiwgImNvcnJlY3QiOiBmYWxzZSwgImZlZWRiYWNrIjogIldyb25nLiJ9XX1d

W3sicXVlc3Rpb24iOiAiSG93IG1hbnkgZmVhdHVyZXMgZG9lcyBhIGh5cGVycGxhbmUgaGF2ZSBpbiBhIHN1cHBvcnQgdmVjdG9yIG1hY2hpbmUgd2hlbiB0aGUgaW5wdXQgZmVhdHVyZXMgYXJlIDU/IFRpcDogKEFuc3dlciBpcyB3aG9sZSBudW1iZXIpIiwgInR5cGUiOiAibnVtZXJpYyIsICJwcmVjaXNpb24iOiAxMCwgImFuc3dlcnMiOiBbeyJ0eXBlIjogInZhbHVlIiwgInZhbHVlIjogNCwgImNvcnJlY3QiOiB0cnVlLCAiZmVlZGJhY2siOiAiQ29ycmVjdC4ifSwgeyJ0eXBlIjogInJhbmdlIiwgInJhbmdlIjogWy0xMDAwMDAwMDAsIC0xZS0wOV0sICJjb3JyZWN0IjogZmFsc2UsICJmZWVkYmFjayI6ICJXcm9uZy4gVHJ5IGFnYWluLiJ9LCB7InR5cGUiOiAicmFuZ2UiLCAicmFuZ2UiOiBbNC4wMDAwMDAxLCAxMDAwMDAwMDBdLCAiY29ycmVjdCI6IGZhbHNlLCAiZmVlZGJhY2siOiAiV3JvbmcuIFRyeSBhZ2Fpbi4ifV19XQ==

W3sicXVlc3Rpb24iOiAiV2hhdCBpcyB0aGUgcm9sZSBvZiB0aGUgZGVjaXNpb24gYm91bmRhcnkgaW4gU1ZNPyAoU2VsZWN0IGFsbCB0aGF0IGFwcGx5KSIsICJ0eXBlIjogIm1hbnlfY2hvaWNlIiwgImFuc3dlcnMiOiBbeyJhbnN3ZXIiOiAiIFNlcGFyYXRlcyBkaWZmZXJlbnQgY2xhc3NlcyIsICJjb3JyZWN0IjogdHJ1ZSwgImZlZWRiYWNrIjogIkNvcnJlY3QuIn0sIHsiYW5zd2VyIjogIiBNaW5pbWl6ZXMgdGhlIG1hcmdpbiBiZXR3ZWVuIGNsYXNzZXMiLCAiY29ycmVjdCI6IGZhbHNlLCAiZmVlZGJhY2siOiAiV3JvbmcuIn0sIHsiYW5zd2VyIjogIiAgSGFzIG5vIGltcGFjdCBvbiBjbGFzc2lmaWNhdGlvbiBhY2N1cmFjeSIsICJjb3JyZWN0IjogZmFsc2UsICJmZWVkYmFjayI6ICJXcm9uZy4ifSwgeyJhbnN3ZXIiOiAiIE1heGltaXplcyB0aGUgbWFyZ2luIGJldHdlZW4gY2xhc3NlcyIsICJjb3JyZWN0IjogdHJ1ZSwgImZlZWRiYWNrIjogIkNvcnJlY3QuIn1dfV0=

W3sicXVlc3Rpb24iOiAiV2hhdCBpcyB0aGUgZGVmYXVsdCB2YWx1ZSBvZiB0aGUgQyBwYXJhbWV0ZXIgaW4gU1ZNPyIsICJ0eXBlIjogIm51bWVyaWMiLCAicHJlY2lzaW9uIjogMCwgImFuc3dlcnMiOiBbeyJ0eXBlIjogInZhbHVlIiwgInZhbHVlIjogMSwgImNvcnJlY3QiOiB0cnVlLCAiZmVlZGJhY2siOiAiQ29ycmVjdC4ifSwgeyJ0eXBlIjogInJhbmdlIiwgInJhbmdlIjogWy0xMDAwMDAwMDAsIDBdLCAiY29ycmVjdCI6IGZhbHNlLCAiZmVlZGJhY2siOiAiV3JvbmcuIFRyeSBhZ2Fpbi4ifSwgeyJ0eXBlIjogInJhbmdlIiwgInJhbmdlIjogWzIsIDEwMDAwMDAwMF0sICJjb3JyZWN0IjogZmFsc2UsICJmZWVkYmFjayI6ICJXcm9uZy4gVHJ5IGFnYWluLiJ9XX1d

W3sicXVlc3Rpb24iOiAiV2hhdCBpcyB0aGUgcHJpbWFyeSBwdXJwb3NlIG9mIFNWTT8iLCAidHlwZSI6ICJtYW55X2Nob2ljZSIsICJhbnN3ZXJzIjogW3siYW5zd2VyIjogIkNsYXNzaWZpY2F0aW9uIiwgImNvcnJlY3QiOiB0cnVlLCAiZmVlZGJhY2siOiAiQ29ycmVjdC4ifSwgeyJhbnN3ZXIiOiAiUmVncmVzc2lvbiIsICJjb3JyZWN0IjogZmFsc2UsICJmZWVkYmFjayI6ICJXcm9uZy4ifSwgeyJhbnN3ZXIiOiAiQ2x1c3RlcmluZyIsICJjb3JyZWN0IjogZmFsc2UsICJmZWVkYmFjayI6ICJXcm9uZy4ifSwgeyJhbnN3ZXIiOiAiIERpbWVuc2lvbmFsaXR5IHJlZHVjdGlvbiIsICJjb3JyZWN0IjogZmFsc2UsICJmZWVkYmFjayI6ICJXcm9uZy4ifV19XQ==

W3sicXVlc3Rpb24iOiAiV2hpY2ggbWF0aGVtYXRpY2FsIGNvbmNlcHQgaXMgY3J1Y2lhbCBmb3IgU1ZNPyIsICJ0eXBlIjogIm1hbnlfY2hvaWNlIiwgImFuc3dlcnMiOiBbeyJhbnN3ZXIiOiAiVHJpZ29ub21ldHJ5IiwgImNvcnJlY3QiOiBmYWxzZSwgImZlZWRiYWNrIjogIldyb25nLiJ9LCB7ImFuc3dlciI6ICJNYXRyaXggbXVsdGlwbGljYXRpb24iLCAiY29ycmVjdCI6IGZhbHNlLCAiZmVlZGJhY2siOiAiV3JvbmcuIn0sIHsiYW5zd2VyIjogIkV4cG9uZW50aWFsIGZ1bmN0aW9uIiwgImNvcnJlY3QiOiBmYWxzZSwgImZlZWRiYWNrIjogIldyb25nLiJ9LCB7ImFuc3dlciI6ICIgRG90IHByb2R1Y3QiLCAiY29ycmVjdCI6IHRydWUsICJmZWVkYmFjayI6ICJDb3JyZWN0LiJ9XX1d

W3sicXVlc3Rpb24iOiAiV2hhdCBkb2VzIHRoZSBtYXhpbXVtIG1hcmdpbiBpbiBTVk0gcmVmZXIgdG8/IiwgInR5cGUiOiAibWFueV9jaG9pY2UiLCAiYW5zd2VycyI6IFt7ImFuc3dlciI6ICJUaGUgZGlzdGFuY2UgYmV0d2VlbiBzdXBwb3J0IHZlY3RvcnMiLCAiY29ycmVjdCI6IGZhbHNlLCAiZmVlZGJhY2siOiAiV3JvbmcuIn0sIHsiYW5zd2VyIjogIlRoZSBkaXN0YW5jZSBiZXR3ZWVuIHRoZSBoeXBlcnBsYW5lIGFuZCB0aGUgbmVhcmVzdCBkYXRhIHBvaW50IiwgImNvcnJlY3QiOiBmYWxzZSwgImZlZWRiYWNrIjogIldyb25nLiJ9LCB7ImFuc3dlciI6ICJUaGUgYW5nbGUgYmV0d2VlbiB2ZWN0b3JzIiwgImNvcnJlY3QiOiBmYWxzZSwgImZlZWRiYWNrIjogIldyb25nLiJ9LCB7ImFuc3dlciI6ICJUaGUgd2lkdGggb2YgdGhlIGRlY2lzaW9uIGJvdW5kYXJ5IiwgImNvcnJlY3QiOiB0cnVlLCAiZmVlZGJhY2siOiAiQ29ycmVjdC4ifV19XQ==

W3sicXVlc3Rpb24iOiAiV2hhdCBpcyB0aGUgbWluaW11bSBwb3NzaWJsZSB2YWx1ZSBmb3IgdGhlIG1hcmdpbiBpbiBTVk0/IFRpcDogKEFuc3dlciBpcyB3aG9sZSBudW1iZXIpIiwgInR5cGUiOiAibnVtZXJpYyIsICJwcmVjaXNpb24iOiAxMCwgImFuc3dlcnMiOiBbeyJ0eXBlIjogInZhbHVlIiwgInZhbHVlIjogMCwgImNvcnJlY3QiOiB0cnVlLCAiZmVlZGJhY2siOiAiQ29ycmVjdC4gVGhhdCBpcyB0aGUgbWluaW11bS4ifSwgeyJ0eXBlIjogInJhbmdlIiwgInJhbmdlIjogWy0xMDAwMDAwMDAsIC0xZS0wOV0sICJjb3JyZWN0IjogZmFsc2UsICJmZWVkYmFjayI6ICJXcm9uZy4gVHJ5IGFnYWluLiJ9LCB7InR5cGUiOiAicmFuZ2UiLCAicmFuZ2UiOiBbMWUtMDksIDEwMDAwMDAwMF0sICJjb3JyZWN0IjogZmFsc2UsICJmZWVkYmFjayI6ICJXcm9uZy4gVHJ5IGFnYWluLiJ9XX1d

W3sicXVlc3Rpb24iOiAiV2hpY2ggdHlwZSBvZiBTVk0gYWxsb3dzIGZvciBtaXNjbGFzc2lmaWNhdGlvbiBvZiBzb21lIGRhdGEgcG9pbnRzPyIsICJ0eXBlIjogIm1hbnlfY2hvaWNlIiwgImFuc3dlcnMiOiBbeyJhbnN3ZXIiOiAiSGFyZCBNYXJnaW4gU1ZNIiwgImNvcnJlY3QiOiBmYWxzZSwgImZlZWRiYWNrIjogIldyb25nLiJ9LCB7ImFuc3dlciI6ICJMaW5lYXIgU1ZNIiwgImNvcnJlY3QiOiBmYWxzZSwgImZlZWRiYWNrIjogIldyb25nLiJ9LCB7ImFuc3dlciI6ICJTb2Z0IE1hcmdpbiBTVk0iLCAiY29ycmVjdCI6IHRydWUsICJmZWVkYmFjayI6ICJDb3JyZWN0LiJ9LCB7ImFuc3dlciI6ICJOb24tbGluZWFyIFNWTSIsICJjb3JyZWN0IjogZmFsc2UsICJmZWVkYmFjayI6ICJXcm9uZy4ifV19XQ==

W3sicXVlc3Rpb24iOiAiV2hhdCBpcyB0aGUgcm9sZSBvZiB0aGUga2VybmVsIHRyaWNrIGluIFNWTT8iLCAidHlwZSI6ICJtYW55X2Nob2ljZSIsICJhbnN3ZXJzIjogW3siYW5zd2VyIjogIkl0IHNpbXBsaWZpZXMgdGhlIGRlY2lzaW9uIGJvdW5kYXJ5IiwgImNvcnJlY3QiOiBmYWxzZSwgImZlZWRiYWNrIjogIldyb25nLiJ9LCB7ImFuc3dlciI6ICJJdCByZWR1Y2VzIG92ZXJmaXR0aW5nIiwgImNvcnJlY3QiOiBmYWxzZSwgImZlZWRiYWNrIjogIldyb25nLiJ9LCB7ImFuc3dlciI6ICJJdCB0cmFuc2Zvcm1zIGRhdGEgaW50byBhIGhpZ2hlci1kaW1lbnNpb25hbCBzcGFjZSIsICJjb3JyZWN0IjogdHJ1ZSwgImZlZWRiYWNrIjogIkNvcnJlY3QuIn0sIHsiYW5zd2VyIjogIkl0IGNvbXB1dGVzIHRoZSBkb3QgcHJvZHVjdCBlZmZpY2llbnRseSIsICJjb3JyZWN0IjogZmFsc2UsICJmZWVkYmFjayI6ICJXcm9uZy4ifV19XQ==

W3sicXVlc3Rpb24iOiAiV2hhdCBpcyB0aGUgcHVycG9zZSBvZiB0aGUgQyBoeXBlcnBhcmFtZXRlciBpbiBTVk0/IChTZWxlY3QgYWxsIHRoYXQgYXBwbHkpIiwgInR5cGUiOiAibWFueV9jaG9pY2UiLCAiYW5zd2VycyI6IFt7ImFuc3dlciI6ICJUbyBkZXRlcm1pbmUgdGhlIG51bWJlciBvZiBzdXBwb3J0IHZlY3RvcnMiLCAiY29ycmVjdCI6IGZhbHNlLCAiZmVlZGJhY2siOiAiV3JvbmcuIn0sIHsiYW5zd2VyIjogIlRvIGNvbnRyb2wgdGhlIHJlZ3VsYXJpemF0aW9uIHN0cmVuZ3RoIiwgImNvcnJlY3QiOiB0cnVlLCAiZmVlZGJhY2siOiAiQ29ycmVjdC4ifSwgeyJhbnN3ZXIiOiAiVG8gZGV0ZXJtaW5lIHRoZSBudW1iZXIgb2Ygc3VwcG9ydCB2ZWN0b3JzIiwgImNvcnJlY3QiOiB0cnVlLCAiZmVlZGJhY2siOiAiQ29ycmVjdC4ifSwgeyJhbnN3ZXIiOiAiVG8gc2V0IHRoZSBsZWFybmluZyByYXRlIiwgImNvcnJlY3QiOiBmYWxzZSwgImZlZWRiYWNrIjogIldyb25nLiJ9XX1d

W3sicXVlc3Rpb24iOiAiR2l2ZW4gYSBTVk0gbW9kZWwgd2l0aCBhIGxpbmVhciBrZXJuZWwsIGlmIHRoZSBlcXVhdGlvbiBvZiB0aGUgaHlwZXJwbGFuZSBpcyA0eDEgLSAzeDIgKyAxMiA9IDAsIGNhbGN1bGF0ZSB0aGUgZGlzdGFuY2UgYmV0d2VlbiB0aGUgaHlwZXJwbGFuZSBhbmQgdGhlIG9yaWdpbiB1c2luZyB0aGUgZm9ybXVsYSBmb3IgdGhlIGRpc3RhbmNlIGZyb20gYSBwb2ludCB0byBhIHBsYW5lLiIsICJ0eXBlIjogIm51bWVyaWMiLCAicHJlY2lzaW9uIjogMTAsICJhbnN3ZXJzIjogW3sidHlwZSI6ICJ2YWx1ZSIsICJ2YWx1ZSI6IDIuNCwgImNvcnJlY3QiOiB0cnVlLCAiZmVlZGJhY2siOiAiQ29ycmVjdC4gWW91IGFyZSBwcmV0dHkgZ29vZCEifSwgeyJ0eXBlIjogInJhbmdlIiwgInJhbmdlIjogWy0xMDAwMDAwMDAsIC0xZS0wOV0sICJjb3JyZWN0IjogZmFsc2UsICJmZWVkYmFjayI6ICJXcm9uZy4gVHJ5IGFnYWluLiJ9LCB7InR5cGUiOiAicmFuZ2UiLCAicmFuZ2UiOiBbMWUtMDksIDEwMDAwMDAwMF0sICJjb3JyZWN0IjogZmFsc2UsICJmZWVkYmFjayI6ICJXcm9uZy4gVHJ5IGFnYWluLiJ9XX1d

W3sicXVlc3Rpb24iOiAiV2hpY2gga2VybmVsIGZ1bmN0aW9uIGlzIGNvbW1vbmx5IHVzZWQgZm9yIG5vbi1saW5lYXIgU1ZNcz8iLCAidHlwZSI6ICJtYW55X2Nob2ljZSIsICJhbnN3ZXJzIjogW3siYW5zd2VyIjogIkxpbmVhciBrZXJuZWwiLCAiY29ycmVjdCI6IGZhbHNlLCAiZmVlZGJhY2siOiAiV3JvbmcuIn0sIHsiYW5zd2VyIjogIlBvbHlub21pYWwga2VybmVsIiwgImNvcnJlY3QiOiBmYWxzZSwgImZlZWRiYWNrIjogIldyb25nLiJ9LCB7ImFuc3dlciI6ICJSYWRpYWwgYmFzaXMgZnVuY3Rpb24gKFJCRikga2VybmVsIiwgImNvcnJlY3QiOiB0cnVlLCAiZmVlZGJhY2siOiAiQ29ycmVjdC4ifSwgeyJhbnN3ZXIiOiAiU2lnbW9pZCBrZXJuZWwiLCAiY29ycmVjdCI6IGZhbHNlLCAiZmVlZGJhY2siOiAiV3JvbmcuIn1dfV0=

W3sicXVlc3Rpb24iOiAiV2hhdCBpcyB0aGUgcHVycG9zZSBvZiB0aGUgc2xhY2sgdmFyaWFibGVzIGluIFNWTT8gKFNlbGVjdCBhbGwgdGhhdCBhcHBseSkiLCAidHlwZSI6ICJtYW55X2Nob2ljZSIsICJhbnN3ZXJzIjogW3siYW5zd2VyIjogIlRvIGRldGVybWluZSB0aGUgbnVtYmVyIG9mIHN1cHBvcnQgdmVjdG9ycyIsICJjb3JyZWN0IjogZmFsc2UsICJmZWVkYmFjayI6ICJXcm9uZy4ifSwgeyJhbnN3ZXIiOiAiVG8gYWRqdXN0IHRoZSBrZXJuZWwgZnVuY3Rpb24iLCAiY29ycmVjdCI6IGZhbHNlLCAiZmVlZGJhY2siOiAiV3JvbmcuIn0sIHsiYW5zd2VyIjogIlRvIGNvbnRyb2wgdGhlIHJlZ3VsYXJpemF0aW9uIHN0cmVuZ3RoIiwgImNvcnJlY3QiOiB0cnVlLCAiZmVlZGJhY2siOiAiQ29ycmVjdC4ifSwgeyJhbnN3ZXIiOiAiVG8gcGVuYWxpemUgbWlzY2xhc3NpZmllZCBkYXRhIHBvaW50cyIsICJjb3JyZWN0IjogdHJ1ZSwgImZlZWRiYWNrIjogIkNvcnJlY3QuIn1dfV0=

W3sicXVlc3Rpb24iOiAiV2hpY2ggU1ZNIHZhcmlhbnQgaXMgc3VpdGFibGUgZm9yIGhhbmRsaW5nIG5vaXN5IGRhdGE/IiwgInR5cGUiOiAibWFueV9jaG9pY2UiLCAiYW5zd2VycyI6IFt7ImFuc3dlciI6ICJIYXJkIE1hcmdpbiBTVk0iLCAiY29ycmVjdCI6IGZhbHNlLCAiZmVlZGJhY2siOiAiV3JvbmcuIn0sIHsiYW5zd2VyIjogIkxpbmVhciBTVk0iLCAiY29ycmVjdCI6IGZhbHNlLCAiZmVlZGJhY2siOiAiV3JvbmcuIn0sIHsiYW5zd2VyIjogIlNvZnQgTWFyZ2luIFNWTSIsICJjb3JyZWN0IjogdHJ1ZSwgImZlZWRiYWNrIjogIkNvcnJlY3QuIn0sIHsiYW5zd2VyIjogIk5vbi1saW5lYXIgU1ZNIiwgImNvcnJlY3QiOiBmYWxzZSwgImZlZWRiYWNrIjogIldyb25nLiJ9XX1d

W3sicXVlc3Rpb24iOiAiV2hpY2gga2VybmVsIGZ1bmN0aW9uIGlzIHN1aXRhYmxlIGZvciB0ZXh0IGNsYXNzaWZpY2F0aW9uIHRhc2tzPyAoU2VsZWN0IGFsbCB0aGF0IGFwcGx5KSIsICJ0eXBlIjogIm1hbnlfY2hvaWNlIiwgImFuc3dlcnMiOiBbeyJhbnN3ZXIiOiAiTGluZWFyIGtlcm5lbCIsICJjb3JyZWN0IjogdHJ1ZSwgImZlZWRiYWNrIjogIkNvcnJlY3QuIn0sIHsiYW5zd2VyIjogIlBvbHlub21pYWwga2VybmVsIiwgImNvcnJlY3QiOiBmYWxzZSwgImZlZWRiYWNrIjogIldyb25nLiJ9LCB7ImFuc3dlciI6ICJSYWRpYWwgYmFzaXMgZnVuY3Rpb24gKFJCRikga2VybmVsIiwgImNvcnJlY3QiOiB0cnVlLCAiZmVlZGJhY2siOiAiQ29ycmVjdC4ifSwgeyJhbnN3ZXIiOiAiU2lnbW9pZCBrZXJuZWwiLCAiY29ycmVjdCI6IGZhbHNlLCAiZmVlZGJhY2siOiAiV3JvbmcuIn1dfV0=

W3sicXVlc3Rpb24iOiAiV2hpY2ggU1ZNIHZhcmlhbnQgaXMgc3VpdGFibGUgZm9yIGhhbmRsaW5nIGltYmFsYW5jZWQgZGF0YXNldHM/IiwgInR5cGUiOiAibWFueV9jaG9pY2UiLCAiYW5zd2VycyI6IFt7ImFuc3dlciI6ICJIYXJkIE1hcmdpbiBTVk0iLCAiY29ycmVjdCI6IGZhbHNlLCAiZmVlZGJhY2siOiAiV3JvbmcuIn0sIHsiYW5zd2VyIjogIkxpbmVhciBTVk0iLCAiY29ycmVjdCI6IGZhbHNlLCAiZmVlZGJhY2siOiAiV3JvbmcuIn0sIHsiYW5zd2VyIjogIk5vbi1saW5lYXIgU1ZNIiwgImNvcnJlY3QiOiBmYWxzZSwgImZlZWRiYWNrIjogIldyb25nLiJ9LCB7ImFuc3dlciI6ICJTb2Z0IE1hcmdpbiBTVk0iLCAiY29ycmVjdCI6IHRydWUsICJmZWVkYmFjayI6ICJDb3JyZWN0LiJ9XX1d

W3sicXVlc3Rpb24iOiAiSW4gU1ZNLCB3aGF0IGlzIHRoZSBwdXJwb3NlIG9mIHRoZSBnYW1tYSBwYXJhbWV0ZXIgaW4gdGhlIFJCRiBrZXJuZWw/IChTZWxlY3QgYWxsIHRoYXQgYXBwbHkpIiwgInR5cGUiOiAibWFueV9jaG9pY2UiLCAiYW5zd2VycyI6IFt7ImFuc3dlciI6ICJUbyBhZGp1c3QgdGhlIHJlZ3VsYXJpemF0aW9uIHN0cmVuZ3RoIiwgImNvcnJlY3QiOiBmYWxzZSwgImZlZWRiYWNrIjogIldyb25nLiJ9LCB7ImFuc3dlciI6ICJUbyBkZXRlcm1pbmUgdGhlIG51bWJlciBvZiBzdXBwb3J0IHZlY3RvcnMiLCAiY29ycmVjdCI6IHRydWUsICJmZWVkYmFjayI6ICJDb3JyZWN0LiJ9LCB7ImFuc3dlciI6ICJUbyBjb250cm9sIHRoZSBrZXJuZWwgd2lkdGgiLCAiY29ycmVjdCI6IHRydWUsICJmZWVkYmFjayI6ICJDb3JyZWN0LiJ9LCB7ImFuc3dlciI6ICJUbyBzZXQgdGhlIGxlYXJuaW5nIHJhdGUiLCAiY29ycmVjdCI6IGZhbHNlLCAiZmVlZGJhY2siOiAiV3JvbmcuIn1dfV0=

The support vector machine is a type of supervised learning algorithm used for classification and regression analysis. The support vector machine is based on the idea of finding a hyperplane that best separates the data points into different classes. The hyperplane is chosen such that it maximizes the margin between the two classes. The support vector machine is a generalization of the maximal margin classifier and the support vector classifier. Therefore, before starting, we need to touch on a little maximal margin classifier and the support vector classifier.

In [18]:
from jupyterquiz import display_quiz
display_quiz('#a')

Maximal Margin Classifier¶

The maximal margin classifier is a simple linear classifier that separates the data points into different classes by finding a hyperplane that maximizes the margin between the two classes. The margin is defined as the distance between the hyperplane and the closest data points from each class. The maximal margin classifier is a special case of the support vector classifier.

The maximal margin classifier is designed specifically for linearly separable data, which refers to the condition in which data can be separated linearly using a hyperplane. However, this classifier has some drawbacks. It is heavily reliant on the support vector and changes as support vectors change, which makes it tend to overfit. It also can’t be used for data that isn’t linearly separable, which makes it inefficient for the majority of real-world data that is non-linear.

In [3]:
display_quiz('#i')

sfsdgeg

Support Vector Classifier¶

The support vector classifier is a linear classifier that separates the data points into different classes by finding a hyperplane that maximizes the margin between the two classes. The margin is defined as the distance between the hyperplane and the closest data points from each class. The support vector classifier is a special case of the support vector machine.

The support vector classifier is an extension of the maximal margin classifier and is less sensitive to individual data. Since it allows certain data to be misclassified, it’s also known as the “Soft Margin Classifier”. It creates a budget under which the misclassification allowance is granted. This classifier covers the drawbacks of the maximal margin classifier by allowing for some misclassification and being less reliant on the support vector.

In [4]:
display_quiz('#i1')

But what about whis type of data?

No matter where we put the classifier, there will be a lot of missclassification

How do SVM covers drawbacks of SVC?¶

The support vector machine covers the drawbacks of the support vector classifier by using a kernel function to transform the input data into a higher-dimensional space. This allows the support vector machine to handle non-linearly separable data.

In [5]:
display_quiz("#b")

Hyperplanes¶

Hyperplanes are decision boundaries that help classify the data points. Data points falling on either side of the hyperplane can be attributed to different classes. Also, the dimension of the hyperplane depends upon the number of features. If the number of input features is 2, then the hyperplane is just a line. If the number of input features is 3, then the hyperplane becomes a two-dimensional plane. It becomes difficult to imagine when the number of features exceeds 3.

In [6]:
display_quiz("#c2")
In [7]:
display_quiz("#a4")

Understanding the mathematics behind SVM¶

Many people skip the math intuition behind this algorithm because it is pretty hard to digest. Here in this section, we’ll try to understand each and every step working under the hood.

Understanding Dot-Product¶

sfsdgeg

Here a and b are 2 vectors, to find the dot product between these 2 vectors we first find the magnitude of both the vectors (to find magnitude we use the Pythagorean theorem or the distance formula). After finding the magnitude we simply multiply it with the cosine angle between both the vectors. Mathematically it can be written as:

$$\overrightarrow{a} \cdot \overrightarrow{b} = \vert a \vert cos\theta \cdot \vert b \vert$$

Now in SVM we just need the projection of A not the magnitude of B, I’ll tell you why later. To just get the projection we can simply take the unit vector of B. Hence now the equation becomes:

$$\overrightarrow{a} \cdot \overrightarrow{b} = \vert a \vert cos\theta \cdot \text{unit vector of }b$$

Use of Dot Product in SVM¶

Consider a random point X and we want to know whether it lies on the right side of the plane or the left side of the plane (positive or negative).

sfsdgeg

To find this first we assume this point is a vector (X) and then we make a vector (w) which is perpendicular to the hyperplane. Let’s say the distance of vector w from origin to decision boundary is ‘c’. Now we take the projection of X vector on w.

sfsdgeg

We already know that projection of any vector or another vector is called dot-product. Hence, we take the dot product of x and w vectors. If the dot product is greater than ‘c’ then we can say that the point lies on the right side. If the dot product is less than ‘c’ then the point is on the left side and if the dot product is equal to ‘c’ then the point lies on the decision boundary.

$\overrightarrow{x} \cdot \overrightarrow{w} = c$ (the point lies on the decision boundary)
$\overrightarrow{x} \cdot \overrightarrow{w} > c$ (positive samples)
$\overrightarrow{x} \cdot \overrightarrow{w} > c$ (negative samples)

Equation of the Hyperplane, Margin¶

This equation is derived from two-dimensional vectors. But in fact, it also works for any number of dimensions. Equation of the hyperplane:

$$\omega \cdot x + b = 0$$

Distance from a data point to the decision boundary:
$$\text{Margin} = \frac{1}{\Vert \omega \Vert}$$

In [8]:
display_quiz("#b2")

Task of SVM¶

In task of SVM classification problem where $x = \mathbb{R}^p$ and $y = \{-1, +1\}$, we aim to find parameters $w$ in $\mathbb{R}^p$ and $b$ in $\mathbb{R}$. The classification algorithm $a(x, w)$ is defined as $\text{sign}(\langle x, w \rangle - b)$.

Let's build simple SVM classifier with some predefined x and w and use it on test data

In [9]:
import numpy as np
import plotly.graph_objects as go

X = np.random.randn(20, 2)
y = np.repeat([-1, 1], 10)
X[y == 1] += 1

w = np.array((1.5, 1.2))
b = 1.0

def simple_svm_classifier(x, w, b):
    return np.sign(np.dot(x, w) - b)

predictions = simple_svm_classifier(X, w, b)

fig = go.Figure()

actual_trace = go.Scatter(x=X[:, 0], y=X[:, 1], mode='markers',
                          marker=dict(color=y, colorscale='Viridis', line=dict(width=1)),
                          name='Actual Values')

predicted_trace = go.Scatter(x=X[:, 0], y=X[:, 1], mode='markers',
                             marker=dict(color=predictions, colorscale='Viridis', line=dict(width=1)),
                             name='Predicted Values')

fig.add_traces([actual_trace, predicted_trace])

buttons = [
    {"label": "Show Actual Values",
     "method": "update",
     "args": [{"visible": [True, False]},
              {"title": "SVM Classifier - Actual Values"}]},
    {"label": "Show Predicted Values",
     "method": "update",
     "args": [{"visible": [False, True]},
              {"title": "SVM Classifier - Predicted Values"}]}
]

fig.update_layout(
    updatemenus=[
        {
            "buttons": buttons,
            "direction": "down",
            "pad": {"r": 10, "t": 10},
            "showactive": True,
            "x": 0.33,
            "xanchor": "left",
            "y": 1.12,
            "yanchor": "top"
        }
    ],
    title='SVM Classifier - Actual vs Predicted',
    xaxis_title='Feature 1',
    yaxis_title='Feature 2',
    width=1000,
    height=600
)

# Display the plot
fig.show()

On the charts above, we can see actual and predicted data classification. Now it would be great to see hyperplane or decision boundary on the chart above. Equation of decision boundary for 2 feature classification will be:

$$\omega0 \cdot x0 + \omega1 \cdot x1 + b = 0$$
In [10]:
import numpy as np
import plotly.graph_objects as go

# Create x_range and calculate y_range for the decision boundary
x_range = np.linspace(-5, 5, 100)
y_range = (-w[0] * x_range + b) / w[1]

fig = go.Figure()

fig.add_trace(go.Scatter(x=X[:, 0], y=X[:, 1], mode='markers',
                         marker=dict(color=predictions, colorscale='Viridis', line=dict(width=1)),
                         name='Predicted Values'))

fig.add_trace(go.Scatter(x=x_range, y=y_range, mode='lines',
                         line=dict(color='blue'),
                         name='Decision Boundary'))

fig.update_layout(title='Predicted Values with Decision Boundary',
                  xaxis=dict(range=[-3, 3], title='Feature 1'),
                  yaxis=dict(range=[-3, 3], title='Feature 2'),
                  width=1000, height=500)

fig.show()

We use a method for minimizing the approximate regularized empirical risk:

$$ \sum_{i=1}^{n} (1 - M_i(W, W_0)) + \frac{1}{2}C ||W||^2 \rightarrow \min_{W, W_0} $$

where $M_i(W, W_0) = Y_i(\langle x_i, W \rangle - W_0)$ is the margin of object $x_i$.

$\sum_{i=1}^{n} (1 - M_i(W, W_0))$ is Hinge Loss Function that using for punish forecasts that are off margin

$\frac{1}{2}C ||W||^2$ is Regularization Term that help model to control its complexity

Let's build SVM model and apply information above to its training

In [11]:
#Creating model that will calculate proper w and b
class SVM:
    def __init__(self, learning_rate=0.001, lambda_param=0.01, n_iters=1000):
        self.lr = learning_rate
        self.lambda_param = lambda_param
        self.n_iters = n_iters
        self.w = None
        self.b = None

    def fit(self, X, y):
        n_samples, n_features = X.shape
        y_ = np.where(y <= 0, -1, 1)
        self.w = np.zeros(n_features)
        self.b = 0

        for _ in range(self.n_iters):
            for idx, x_i in enumerate(X):
                margin = y_[idx] * (np.dot(x_i, self.w) - self.b) #Calculating margin
                condition = -1 <= margin >= 1
                regularization_term = 0.5 * self.lambda_param * np.linalg.norm(self.w)**2 #Calculating regularization term
                if condition:
                    self.w -= self.lr * regularization_term
                else:
                    self.w -= self.lr * (regularization_term - np.dot(x_i, y_[idx]))
                    self.b -= self.lr * y_[idx]
                

    def predict(self, X):
        return np.sign(np.dot(X, self.w) - self.b)#Use calculated w and b in classificator that we used previousy

Now it's time to fit our model on generated data and visualize result

In [12]:
import numpy as np
from sklearn.model_selection import train_test_split
import plotly.graph_objects as go

# Generate data
X = np.random.randn(400, 2)
y = np.repeat([-1, 1], 200)
X[y == 1] += 1

# Split the data into training and testing sets
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=123)

# Train the SVM
svm = SVM()
svm.fit(X_train, y_train)
predictions = svm.predict(X_test)

# Calculate accuracy
accuracy = np.mean(predictions == y_test) * 100
print(f"Accuracy: {accuracy}")

# Prepare data for hyperplane visualization
x_range = np.linspace(-5, 5, 100)
y_range = (-svm.w[0] * x_range + svm.b) / svm.w[1]

# Create a Plotly figure
fig = go.Figure()

# Add scatter plot for test data
fig.add_trace(go.Scatter(x=X_test[:, 0], y=X_test[:, 1], mode='markers',
                         marker=dict(color=y_test, colorscale='Viridis', line=dict(width=1)),
                         name='Test Data'))

# Add line plot for the hyperplane
fig.add_trace(go.Scatter(x=x_range, y=y_range, mode='lines',
                         line=dict(color='blue'),
                         name='Hyperplane'))

# Update layout
fig.update_layout(title='SVM Hyperplane Visualization',
                  xaxis_title='Feature 1',
                  yaxis_title='Feature 2',
                  width=800, height=600)

# Display the plot
fig.show()
Accuracy: 73.33333333333333

Kernels in Support Vector Machine¶

The most interesting feature of SVM is that it can even work with a non-linear dataset and for this, we use “Kernel Trick” which makes it easier to classifies the points. Suppose we have a dataset like this:

sfsdgeg

Here we see we cannot draw a single line or say hyperplane which can classify the points correctly. So what we do is try converting this lower dimension space to a higher dimension space using some quadratic functions which will allow us to find a decision boundary that clearly divides the data points. These functions which help us do this are called Kernels and which kernel to use is purely determined by hyperparameter tuning.

sfsdgeg

In [13]:
display_quiz("#b6")

Karush-Kuhn-Tucker conditions¶

When using the kernel trick in SVM, it is recommended to use the Karush-Kuhn-Tucker (KKT) condition and Lagrange multipliers. The KKT condition is a set of first-order necessary conditions for a solution in nonlinear programming to be optimal, provided that some regularity conditions are satisfied

Karush-Kuhn-Tucker Conditions¶

Consider the following general constrained optimization problem:

\begin{align*} \text{Minimize} \quad & f(x) \\ \text{Subject to} \quad & g_i(x) \leq 0, \quad i = 1, 2, \ldots, m \\ & h_j(x) = 0, \quad j = 1, 2, \ldots, p \end{align*}

The KKT conditions for this problem are as follows:

  1. Stationarity Condition: $$ \nabla f(x) + \sum_{i=1}^{m} \lambda_i \nabla g_i(x) + \sum_{j=1}^{p} \mu_j \nabla h_j(x) = 0 $$

  2. Primal Feasibility: $$g_i(x) \leq 0, \quad i = 1, 2, \ldots, m$$

  3. Dual Feasibility: $$\lambda_i \geq 0, \quad i = 1, 2, \ldots, m$$

  4. Complementary Slackness: $$ \lambda_i g_i(x) = 0, \quad i = 1, 2, \ldots, m $$

  5. Equality Constraint: $$ h_j(x) = 0, \quad j = 1, 2, \ldots, p $$

Lagrange Multipliers in Optimization¶

Lagrange multipliers are a powerful technique in optimization when dealing with equality constraints. In mathematical terms, if we want to optimize a function subject to some constraints, we can use Lagrange multipliers to find the critical points.

Consider the optimization problem:

$$ \text{Minimize } f(x, y) \text{ subject to } g(x, y) = 0 $$

The Lagrangian for this problem is given by:

$$ \mathcal{L}(x, y, \lambda) = f(x, y) - \lambda \cdot g(x, y) $$

where $\lambda$ is the Lagrange multiplier.

Lagrange Multiplier Formula¶

The stationary points of $$ \mathcal{L}(x, y, \lambda) $$ satisfy the following conditions:

\begin{align*} \frac{\partial \mathcal{L}}{\partial x} &= 0 \\ \frac{\partial \mathcal{L}}{\partial y} &= 0 \\ \frac{\partial \mathcal{L}}{\partial \lambda} &= 0 \\ g(x, y) &= 0 \end{align*}

Solving these equations simultaneously will give us the critical points.

Now let's create SVM model with kernel trick

In [14]:
class KernelSVM(SVM):
    def __init__(self, kernel, C=1.0, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.kernel = kernel
        self.C = C

    def fit(self, X, y):
        n_samples, n_features = X.shape
        y_ = np.where(y <= 0, -1, 1)
        self.alpha = np.zeros((n_samples))
        self.sv_idx = np.arange(0, n_samples)
        self.X = X
        self.y = y_

        K = np.zeros((n_samples, n_samples))
        for i in range(n_samples):
            for j in range(n_samples):
                K[i,j] = self.kernel(X[i], X[j])

        for _ in range(self.n_iters):
            for i in range(n_samples):
                if np.sum(K[:,i] * self.alpha * y_) - self.alpha[i] * y_[i] * K[i,i] >= 0:
                    self.alpha[i] -= self.lr * (1 - y_[i] * np.sum(K[:,i] * self.alpha * y_))
                else:
                    self.alpha[i] -= self.lr * (1 - y_[i] * np.sum(K[:,i] * self.alpha * y_) - 2 * self.lambda_param * self.alpha[i])

        self.sv_idx = np.where(self.alpha > 1e-5)[0]
        self.alpha = self.alpha[self.sv_idx]
        self.sv = X[self.sv_idx]
        self.sv_y = y_[self.sv_idx]

        self.b = 0
        for n in range(len(self.alpha)):
            self.b += self.sv_y[n]
            self.b -= np.sum(self.alpha * self.sv_y * K[self.sv_idx[n],self.sv_idx])
        self.b /= len(self.alpha)

    def project(self, X):
        y_predict = np.zeros(len(X))
        for i in range(len(X)):
            s = 0
            for a, sv_y, sv in zip(self.alpha, self.sv_y, self.sv):
                s += a * sv_y * self.kernel(X[i], sv)
            y_predict[i] = s
        return y_predict + self.b

    def predict(self, X):
        return np.sign(self.project(X))

def rbf_kernel(x1, x2, sigma=5.0):
    return np.exp(-np.linalg.norm(x1-x2)**2 / (2 * (sigma ** 2)))
In [15]:
svm = KernelSVM(rbf_kernel, learning_rate=0.001, lambda_param=0.01, n_iters=1000)

svm.fit(X_train, y_train)
predictions = svm.predict(X_test)

print(f"Accuracy: {np.mean(predictions == y_test) * 100}")
Accuracy: 66.66666666666666
In [16]:
import numpy as np
import plotly.graph_objects as go

def plot_decision_boundary_plotly(svm, X, y):
    h = .02 
    x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
    y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
    xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))

    # Predict on the grid
    Z = svm.predict(np.c_[xx.ravel(), yy.ravel()])
    Z = Z.reshape(xx.shape)

    fig = go.Figure()

    fig.add_trace(go.Contour(x=np.linspace(x_min, x_max, int((x_max - x_min) / h + 1)),
                             y=np.linspace(y_min, y_max, int((y_max - y_min) / h + 1)),
                             z=Z, colorscale='Viridis', opacity=0.8, name='Decision Boundary'))

    fig.add_trace(go.Scatter(x=X[:, 0], y=X[:, 1], mode='markers',
                             marker=dict(color=y, colorscale='Viridis', line=dict(width=1)),
                             name='Data Points'))

    fig.update_layout(title='SVM Decision Boundary with RBF Kernel',
                      xaxis_title='Sepal length',
                      yaxis_title='Sepal width',
                      xaxis=dict(range=[xx.min(), xx.max()]),
                      yaxis=dict(range=[yy.min(), yy.max()]),
                      width=800, height=600)

    fig.show()

plot_decision_boundary_plotly(svm, X, y)